In [ ]:
! pip install pytorch
Collecting pytorch Downloading pytorch-1.0.2.tar.gz (689 bytes) Preparing metadata (setup.py): started Preparing metadata (setup.py): finished with status 'done' Building wheels for collected packages: pytorch Building wheel for pytorch (setup.py): started Building wheel for pytorch (setup.py): finished with status 'error' Running setup.py clean for pytorch Failed to build pytorch
error: subprocess-exited-with-error
× python setup.py bdist_wheel did not run successfully.
│ exit code: 1
╰─> [6 lines of output]
Traceback (most recent call last):
File "<string>", line 2, in <module>
File "<pip-setuptools-caller>", line 34, in <module>
File "C:\Users\91995\AppData\Local\Temp\pip-install-9s7gmyfv\pytorch_7da205f5c1264196b7c32083fc8e3a8e\setup.py", line 15, in <module>
raise Exception(message)
Exception: You tried to install "pytorch". The package named for PyTorch is "torch"
[end of output]
note: This error originates from a subprocess, and is likely not a problem with pip.
ERROR: Failed building wheel for pytorch
ERROR: Could not build wheels for pytorch, which is required to install pyproject.toml-based projects
In [ ]:
!pip install split-folders matplotlib opencv-python spicy
Collecting split-folders Downloading split_folders-0.5.1-py3-none-any.whl.metadata (6.2 kB) Requirement already satisfied: matplotlib in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (3.8.3) Requirement already satisfied: opencv-python in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (4.9.0.80) Collecting spicy Downloading spicy-0.16.0-py2.py3-none-any.whl.metadata (310 bytes) Requirement already satisfied: contourpy>=1.0.1 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (1.2.0) Requirement already satisfied: cycler>=0.10 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (0.12.1) Requirement already satisfied: fonttools>=4.22.0 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (4.48.1) Requirement already satisfied: kiwisolver>=1.3.1 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (1.4.5) Requirement already satisfied: numpy<2,>=1.21 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (1.26.2) Requirement already satisfied: packaging>=20.0 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (23.2) Requirement already satisfied: pillow>=8 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (10.2.0) Requirement already satisfied: pyparsing>=2.3.1 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (3.1.1) Requirement already satisfied: python-dateutil>=2.7 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from matplotlib) (2.8.2) Requirement already satisfied: scipy in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from spicy) (1.12.0) Requirement already satisfied: six>=1.5 in c:\users\91995\appdata\local\programs\python\python312\lib\site-packages (from python-dateutil>=2.7->matplotlib) (1.16.0) Downloading split_folders-0.5.1-py3-none-any.whl (8.4 kB) Downloading spicy-0.16.0-py2.py3-none-any.whl (1.7 kB) Installing collected packages: split-folders, spicy Successfully installed spicy-0.16.0 split-folders-0.5.1
In [ ]:
# Import Keras modules and its important APIs
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau
# from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
import numpy as np
import os
import torch
import torch.nn as nn
import numpy as np
import cv2 as cv
import os
import splitfolders
import matplotlib.pyplot as plt
# tensorflow
import tensorflow.keras as keras
import tensorflow as tf
# image processing
from tensorflow.keras.preprocessing import image
from tensorflow.keras.preprocessing.image import ImageDataGenerator, load_img
# model / neural network
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.applications import ResNet50
from tensorflow.keras.applications.resnet50 import preprocess_input
In [ ]:
train_path="archive - Copy/Indian-monuments/images/train"
In [ ]:
splitfolders.ratio(train_path, output="archive - Copy/data-split", seed=1337, ratio=(0.7, 0.2, 0.1), group_prefix=None, move=False)
Copying files: 7909 files [09:34, 13.77 files/s]
In [ ]:
dicti_values={0:'Ajanta Caves',
1:'Charar E Sharif',
2:'Chhota Imambara',
3:'Ellora Caves',
4:'Fatehpur Sikri',
5:'Gateway of India',
6:'Humayun_s Tomb',
7:'India gate pics',
8:'Khajuraho',
9:'Sun Temple Konark',
10:'alai darwaza',
11:'alai minar',
12:'basilica of bom jesus',
13:'charminar',
14:'golden temple',
15:'hawa mahal pics',
16:'iron pillar',
17:'jamali kamali_tomb',
18:'lotus temple',
19:'mysore palace',
20:'qutub minar',
21:'tajmahal',
22:'tanjavur temple',
23:'victoria memorial'}
l=[]
for i in range(23):
l.append(dicti_values[i])
class_names=l
print(class_names)
['Ajanta Caves', 'Charar E Sharif', 'Chhota Imambara', 'Ellora Caves', 'Fatehpur Sikri', 'Gateway of India', 'Humayun_s Tomb', 'India gate pics', 'Khajuraho', 'Sun Temple Konark', 'alai darwaza', 'alai minar', 'basilica of bom jesus', 'charminar', 'golden temple', 'hawa mahal pics', 'iron pillar', 'jamali kamali_tomb', 'lotus temple', 'mysore palace', 'qutub minar', 'tajmahal', 'tanjavur temple']
In [ ]:
datagen = ImageDataGenerator()
In [ ]:
train_generator = datagen.flow_from_directory(
directory="archive - Copy/data-split/train",
classes = class_names,
target_size=(224, 224),
batch_size=32,
class_mode="binary",
)
valid_generator = datagen.flow_from_directory(
directory="archive - Copy/data-split/val",
classes = class_names,
target_size=(224, 224),
batch_size=32,
class_mode="binary",
)
# test data
test_generator = datagen.flow_from_directory(
directory="archive - Copy\data-split/test",
classes = class_names,
target_size=(224, 224),
batch_size=32,
class_mode="binary",
)
<>:18: SyntaxWarning: invalid escape sequence '\d' <>:18: SyntaxWarning: invalid escape sequence '\d' C:\Users\91995\AppData\Local\Temp\ipykernel_21876\1830802308.py:18: SyntaxWarning: invalid escape sequence '\d' directory="archive - Copy\data-split/test",
Found 2935 images belonging to 23 classes. Found 836 images belonging to 23 classes. Found 433 images belonging to 23 classes.
In [ ]:
resnet_50 = ResNet50(include_top=False, weights='imagenet', input_shape=(224,224,3))
for layer in resnet_50.layers:
layer.trainable = False
In [ ]:
x = resnet_50.output
x = layers.GlobalAveragePooling2D()(x)
x = layers.Dense(512, activation='relu')(x)
x = layers.Dropout(0.5)(x)
x = layers.Dense(256, activation='relu')(x)
x = layers.Dropout(0.5)(x)
x = layers.Dense(128, activation='relu')(x)
x = layers.Dropout(0.5)(x)
x = layers.Dense(64, activation='relu')(x)
x = layers.Dropout(0.5)(x)
predictions = layers.Dense(24, activation='softmax')(x)
model = Model(inputs = resnet_50.input, outputs = predictions)
In [ ]:
def trainModel(model, epochs, optimizer):
batch_size = 32
model.compile(optimizer=optimizer, loss="sparse_categorical_crossentropy", metrics=["accuracy"])
return model.fit(train_generator, validation_data=valid_generator, epochs=epochs, batch_size=batch_size)
In [ ]:
model_history = trainModel(model = model, epochs = 10, optimizer = "Adam")
Epoch 1/10
c:\Users\91995\AppData\Local\Programs\Python\Python312\Lib\site-packages\keras\src\trainers\data_adapters\py_dataset_adapter.py:122: UserWarning: Your `PyDataset` class should call `super().__init__(**kwargs)` in its constructor. `**kwargs` can include `workers`, `use_multiprocessing`, `max_queue_size`. Do not pass these arguments to `fit()`, as they will be ignored. self._warn_if_super_not_called()
92/92 ━━━━━━━━━━━━━━━━━━━━ 457s 5s/step - accuracy: 0.0709 - loss: 3.6351 - val_accuracy: 0.1304 - val_loss: 2.8854 Epoch 2/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 313s 3s/step - accuracy: 0.0819 - loss: 2.8926 - val_accuracy: 0.3373 - val_loss: 2.5024 Epoch 3/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 308s 3s/step - accuracy: 0.1695 - loss: 2.5316 - val_accuracy: 0.4402 - val_loss: 1.9049 Epoch 4/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 308s 3s/step - accuracy: 0.2662 - loss: 2.2203 - val_accuracy: 0.5239 - val_loss: 1.5518 Epoch 5/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 310s 3s/step - accuracy: 0.3480 - loss: 1.9105 - val_accuracy: 0.5550 - val_loss: 1.3470 Epoch 6/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 308s 3s/step - accuracy: 0.4223 - loss: 1.6903 - val_accuracy: 0.6053 - val_loss: 1.2092 Epoch 7/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 368s 4s/step - accuracy: 0.4768 - loss: 1.5353 - val_accuracy: 0.6651 - val_loss: 1.0374 Epoch 8/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 339s 4s/step - accuracy: 0.5408 - loss: 1.3158 - val_accuracy: 0.6639 - val_loss: 0.9502 Epoch 9/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 320s 3s/step - accuracy: 0.5772 - loss: 1.2917 - val_accuracy: 0.6926 - val_loss: 0.9173 Epoch 10/10 92/92 ━━━━━━━━━━━━━━━━━━━━ 319s 3s/step - accuracy: 0.6054 - loss: 1.1640 - val_accuracy: 0.6902 - val_loss: 0.8844
In [ ]:
model.save('resnet_model(2).h5')
WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`.
In [ ]:
loss_train_curve = model_history.history["loss"]
loss_val_curve = model_history.history["val_loss"]
plt.plot(loss_train_curve, label = "Train")
plt.plot(loss_val_curve, label = "Validation")
plt.legend(loc = 'upper right')
plt.title("Loss")
plt.show()
In [ ]:
acc_train_curve = model_history.history["accuracy"]
acc_val_curve = model_history.history["val_accuracy"]
plt.plot(acc_train_curve, label = "Train")
plt.plot(acc_val_curve, label = "Validation")
plt.legend(loc = 'lower right')
plt.title("Accuracy")
plt.show()
In [ ]:
test_loss, test_acc = model.evaluate(test_generator)
print("The test loss is: ", test_loss)
print("The best accuracy is: ", test_acc*100)
14/14 ━━━━━━━━━━━━━━━━━━━━ 40s 3s/step - accuracy: 0.6977 - loss: 0.9017 The test loss is: 0.8236600160598755 The best accuracy is: 71.59353494644165
In [ ]:
model.save('resnet_model.h5')
WARNING:absl:You are saving your model as an HDF5 file via `model.save()` or `keras.saving.save_model(model)`. This file format is considered legacy. We recommend using instead the native Keras format, e.g. `model.save('my_model.keras')` or `keras.saving.save_model(model, 'my_model.keras')`.
In [ ]:
import os
# import cv2
import pickle
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import keras
import tensorflow
from tensorflow.keras.models import Model
from tensorflow.keras.utils import plot_model
from tensorflow.keras.models import Sequential
from tensorflow.keras.applications import VGG19
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import Input, Lambda, Dense, Flatten, Dropout, BatchNormalization, Activation
from sklearn.metrics import confusion_matrix, classification_report, accuracy_score, recall_score, precision_score, f1_score
Defining data paths¶
In [ ]:
train_path = r'archive - Copy\Indian-monuments\images\train'
test_path = r'archive - Copy\Indian-monuments\images\test'
In [ ]:
for folder in os.listdir(train_path):
sub_path = train_path + "/" + folder
print(folder)
for i in range(1):
temp_path = os.listdir(sub_path)[i]
temp_path = sub_path + "/" + temp_path
img = mpimg.imread(temp_path)
imgplot = plt.imshow(img)
plt.show()
Ajanta Caves
alai_darwaza
alai_minar
basilica_of_bom_jesus
Charar-E- Sharif
charminar
Chhota_Imambara
Ellora Caves
Fatehpur Sikri
Gateway of India
golden temple
hawa mahal pics
Humayun_s Tomb
India gate pics
iron_pillar
jamali_kamali_tomb
Khajuraho
lotus_temple
mysore_palace
qutub_minar
Sun Temple Konark
tajmahal
tanjavur temple
victoria memorial
In [ ]:
def imagearray(path, size):
data = []
for folder in os.listdir(path):
sub_path=path+"/"+folder
for img in os.listdir(sub_path):
image_path=sub_path+"/"+img
img_arr=cv2.imread(image_path)
img_arr=cv2.resize(img_arr,size)
data.append(img_arr)
return data
In [ ]:
import cv2
size = (250,250)
train = imagearray(train_path, size)
test = imagearray(test_path, size)
In [ ]:
x_train = np.array(train)
x_test = np.array(test)
x_train.shape,x_test.shape
Out[ ]:
((7909, 250, 250, 3), (1045, 250, 250, 3))
In [ ]:
x_train = x_train/255.0
x_test = x_test/255.0
Defining target variables¶
In [ ]:
def data_class(data_path, size, class_mode):
datagen = ImageDataGenerator(rescale = 1./255)
classes = datagen.flow_from_directory(data_path,
target_size = size,
batch_size = 32,
class_mode = class_mode)
return classes
In [ ]:
size = (250,250)
train_class = data_class(train_path, size, 'sparse')
test_class = data_class(test_path, size, 'sparse')
Found 7909 images belonging to 24 classes. Found 1045 images belonging to 24 classes.
In [ ]:
y_train = train_class.classes
y_test = test_class.classes
train_d=train_class
test_d=test_class
print(train_class)
<keras.src.legacy.preprocessing.image.DirectoryIterator object at 0x0000022B8C89B380>
In [ ]:
train_class.class_indices
Out[ ]:
{'Ajanta Caves': 0,
'Charar-E- Sharif': 1,
'Chhota_Imambara': 2,
'Ellora Caves': 3,
'Fatehpur Sikri': 4,
'Gateway of India': 5,
'Humayun_s Tomb': 6,
'India gate pics': 7,
'Khajuraho': 8,
'Sun Temple Konark': 9,
'alai_darwaza': 10,
'alai_minar': 11,
'basilica_of_bom_jesus': 12,
'charminar': 13,
'golden temple': 14,
'hawa mahal pics': 15,
'iron_pillar': 16,
'jamali_kamali_tomb': 17,
'lotus_temple': 18,
'mysore_palace': 19,
'qutub_minar': 20,
'tajmahal': 21,
'tanjavur temple': 22,
'victoria memorial': 23}
In [ ]:
y_train.shape,y_test.shape
Out[ ]:
((7909,), (1045,))
In [ ]:
y_pred = model.predict(x_test)
--------------------------------------------------------------------------- ValueError Traceback (most recent call last) Cell In[45], line 1 ----> 1 y_pred = model.predict(x_test) File c:\Users\91995\AppData\Local\Programs\Python\Python312\Lib\site-packages\keras\src\utils\traceback_utils.py:123, in filter_traceback.<locals>.error_handler(*args, **kwargs) 120 filtered_tb = _process_traceback_frames(e.__traceback__) 121 # To get the full stack trace, call: 122 # `keras.config.disable_traceback_filtering()` --> 123 raise e.with_traceback(filtered_tb) from None 124 finally: 125 del filtered_tb File c:\Users\91995\AppData\Local\Programs\Python\Python312\Lib\site-packages\keras\src\layers\input_spec.py:245, in assert_input_compatibility(input_spec, inputs, layer_name) 243 if spec_dim is not None and dim is not None: 244 if spec_dim != dim: --> 245 raise ValueError( 246 f'Input {input_index} of layer "{layer_name}" is ' 247 "incompatible with the layer: " 248 f"expected shape={spec.shape}, " 249 f"found shape={shape}" 250 ) ValueError: Input 0 of layer "functional_1" is incompatible with the layer: expected shape=(None, 224, 224, 3), found shape=(32, 250, 250, 3)
In [ ]:
y_pred=np.argmax(y_pred,axis=1)
In [ ]:
print(classification_report(y_pred,y_test))
In [ ]:
cm = confusion_matrix(y_pred,y_test)
plt.figure(figsize=(10, 8))
ax = plt.subplot()
sns.set(font_scale=2.0)
sns.heatmap(cm, annot=True, fmt='g', cmap="Blues", ax=ax);
# labels, title and ticks
ax.set_xlabel('Predicted labels', fontsize=20);
ax.set_ylabel('True labels', fontsize=20);
ax.set_title('Confusion Matrix', fontsize=20);
In [ ]:
f1_score(y_test, y_pred, average='micro')
In [ ]:
recall_score(y_test, y_pred, average='weighted')
In [ ]:
precision_score(y_test, y_pred, average='micro')